-use std::io::IoError;
-use std::io;
use std::str;
use std::collections::{HashMap, HashSet};
use util;
use util::{CargoResult, ChainError, internal, Config};
+use super::layout::{Layout, LayoutProxy};
+
#[deriving(Show)]
pub enum PlatformRequirement {
Target,
pub config: &'b mut Config<'b>,
pub resolve: &'a Resolve,
- dest: Path,
- host_dest: Path,
- deps_dir: Path,
- host_deps_dir: Path,
+ env: &'a str,
+ host: Layout,
+ target: Option<Layout>,
host_dylib: (String, String),
package_set: &'a PackageSet,
target_dylib: (String, String),
}
impl<'a, 'b> Context<'a, 'b> {
- pub fn new(resolve: &'a Resolve, deps: &'a PackageSet,
+ pub fn new(env: &'a str, resolve: &'a Resolve, deps: &'a PackageSet,
config: &'b mut Config<'b>,
- dest: Path, deps_dir: Path,
- host_dest: Path, host_deps_dir: Path)
+ host: Layout, target: Option<Layout>)
-> CargoResult<Context<'a, 'b>> {
let target_dylib = try!(Context::dylib_parts(config.target()));
let host_dylib = if config.target().is_none() {
};
Ok(Context {
rustc_version: try!(Context::rustc_version()),
- dest: dest,
- deps_dir: deps_dir,
+ env: env,
+ host: host,
+ target: target,
primary: false,
resolve: resolve,
package_set: deps,
target_dylib: target_dylib,
host_dylib: host_dylib,
requirements: HashMap::new(),
- host_dest: host_dest,
- host_deps_dir: host_deps_dir,
})
}
/// Prepare this context, ensuring that all filesystem directories are in
/// place.
pub fn prepare(&mut self, pkg: &'a Package) -> CargoResult<()> {
- debug!("creating target dir; path={}", self.dest.display());
-
- try!(self.mk_target(&self.dest).chain_error(||
- internal(format!("Couldn't create the target directory for {} at {}",
- pkg.get_name(), self.dest.display()))));
- try!(self.mk_target(&self.host_dest).chain_error(||
- internal(format!("Couldn't create the host directory for {} at {}",
- pkg.get_name(), self.dest.display()))));
-
- try!(self.mk_target(&self.deps_dir).chain_error(||
- internal(format!("Couldn't create the directory for dependencies for {} at {}",
- pkg.get_name(), self.deps_dir.display()))));
-
- try!(self.mk_target(&self.host_deps_dir).chain_error(||
- internal(format!("Couldn't create the directory for dependencies for {} at {}",
- pkg.get_name(), self.deps_dir.display()))));
+ try!(self.host.prepare().chain_error(|| {
+ internal(format!("couldn't prepare build directories for `{}`",
+ pkg.get_name()))
+ }));
+ match self.target {
+ Some(ref mut target) => {
+ try!(target.prepare().chain_error(|| {
+ internal(format!("couldn't prepare build directories \
+ for `{}`", pkg.get_name()))
+ }));
+ }
+ None => {}
+ }
let targets = pkg.get_targets().iter();
for target in targets.filter(|t| t.get_profile().is_compile()) {
Ok(())
}
- fn mk_target(&self, target: &Path) -> Result<(), IoError> {
- io::fs::mkdir_recursive(target, io::UserRWX)
- }
-
fn build_requirements(&mut self, pkg: &'a Package, target: &'a Target,
req: PlatformRequirement,
visiting: &mut HashSet<&'a PackageId>) {
self.primary = true;
}
- /// Return the destination directory for output.
- pub fn dest<'a>(&'a self, plugin: bool) -> &'a Path {
- if self.primary {
- if plugin {&self.host_dest} else {&self.dest}
+ /// Returns the appropriate directory layout for either a plugin or not.
+ ///
+ /// Plugins are always built for the host, so they use the host layout.
+ /// Everything else uses the target layout, falling back to the host
+ /// layout when no separate target platform is configured (i.e.
+ /// `self.target` is `None`).
+ pub fn layout<'a>(&'a self, plugin: bool) -> LayoutProxy<'a> {
+ if plugin {
+ LayoutProxy::new(&self.host, self.primary)
} else {
- self.deps_dir(plugin)
+ LayoutProxy::new(self.target.as_ref().unwrap_or(&self.host),
+ self.primary)
}
}
- /// Return the destination directory for dependencies.
- pub fn deps_dir<'a>(&'a self, plugin: bool) -> &'a Path {
- if plugin {&self.host_deps_dir} else {&self.deps_dir}
- }
-
/// Return the (prefix, suffix) pair for dynamic libraries.
///
/// If `plugin` is true, the pair corresponds to the host platform,
if target.is_rlib() {
ret.push(format!("lib{}.rlib", stem));
}
+ if target.is_bin() {
+ ret.push(stem.to_string());
+ }
assert!(ret.len() > 0);
return ret;
}
.expect("Should have found package")
})
.filter_map(|pkg| {
- pkg.get_targets().iter().find(|&t| {
- t.is_lib() && t.get_profile().is_compile()
- }).map(|t| (pkg, t))
+ pkg.get_targets().iter().find(|&t| self.is_relevant_target(t))
+ .map(|t| (pkg, t))
})
.collect()
}
+
+
+ /// Returns whether `target` is a library that should be built for the
+ /// current environment (`self.env`).
+ ///
+ /// In the "test" environment any compilable library counts; otherwise
+ /// the target profile's environment must match `self.env` exactly.
+ pub fn is_relevant_target(&self, target: &Target) -> bool {
+ target.is_lib() && match self.env {
+ "test" => target.get_profile().is_compile(),
+ _ => target.get_profile().get_env() == self.env,
+ }
+ }
}
impl PlatformRequirement {
use std::hash::Hasher;
use std::hash::sip::SipHasher;
-use std::io::File;
+use std::io::{fs, File};
use core::{Package, Target};
use util;
/// it as the first part of the return tuple. It will then prepare a job to
/// update the fingerprint if this package is actually rebuilt as part of
/// compilation, returning the job as the second part of the tuple.
+///
+/// The third part of the tuple is a job to run when a package is discovered to
+/// be fresh to ensure that all of its artifacts are moved to the correct
+/// location.
pub fn prepare(cx: &mut Context, pkg: &Package,
- targets: &[&Target]) -> CargoResult<(Freshness, Job)> {
- let short = short_hash(pkg.get_package_id());
- let fingerprint_loc = cx.dest(false).join(format!(".{}.{}.fingerprint",
- pkg.get_name(),
- short));
+ targets: &[&Target]) -> CargoResult<(Freshness, Job, Job)> {
+ let filename = format!(".{}.{}.fingerprint", pkg.get_name(),
+ short_hash(pkg.get_package_id()));
+ let filename = filename.as_slice();
+ let (old_fingerprint_loc, new_fingerprint_loc) = {
+ let layout = cx.layout(false);
+ (layout.old_root().join(filename), layout.root().join(filename))
+ };
- let (is_fresh, fingerprint) = try!(is_fresh(pkg, &fingerprint_loc,
+ // First, figure out if the old location exists, and if it does whether it's
+ // still fresh or not.
+ let (is_fresh, fingerprint) = try!(is_fresh(pkg, &old_fingerprint_loc,
cx, targets));
+
+ // Prepare a job to update the location of the new fingerprint.
+ let new_fingerprint_loc2 = new_fingerprint_loc.clone();
let write_fingerprint = Job::new(proc() {
- try!(File::create(&fingerprint_loc).write_str(fingerprint.as_slice()));
+ let mut f = try!(File::create(&new_fingerprint_loc2));
+ try!(f.write_str(fingerprint.as_slice()));
Ok(Vec::new())
});
- Ok((if is_fresh {Fresh} else {Dirty}, write_fingerprint))
+
+ // Prepare a job to copy over all old artifacts into their new destination.
+ let mut pairs = Vec::new();
+ pairs.push((old_fingerprint_loc, new_fingerprint_loc));
+ for &target in targets.iter() {
+ for filename in cx.target_filenames(target).iter() {
+ let filename = filename.as_slice();
+ let layout = cx.layout(target.get_profile().is_plugin());
+ pairs.push((layout.old_root().join(filename),
+ layout.root().join(filename)));
+ }
+ }
+ let move_old = Job::new(proc() {
+ for &(ref src, ref dst) in pairs.iter() {
+ try!(fs::rename(src, dst));
+ }
+ Ok(Vec::new())
+ });
+
+ Ok((if is_fresh {Fresh} else {Dirty}, write_fingerprint, move_old))
}
fn is_fresh(dep: &Package, loc: &Path, cx: &mut Context, targets: &[&Target])
pub struct JobQueue<'a, 'b> {
pool: TaskPool,
- queue: DependencyQueue<'a, (&'a Package, Job)>,
+ queue: DependencyQueue<'a, (&'a Package, (Job, Job))>,
tx: Sender<Message>,
rx: Receiver<Message>,
active: HashMap<&'a PackageId, uint>,
impl<'a, 'b> JobQueue<'a, 'b> {
pub fn new(config: &'b mut Config<'b>,
resolve: &'a Resolve,
- jobs: Vec<(&'a Package, Freshness, Job)>) -> JobQueue<'a, 'b> {
+ jobs: Vec<(&'a Package, Freshness, (Job, Job))>)
+ -> JobQueue<'a, 'b> {
let (tx, rx) = channel();
let mut queue = DependencyQueue::new();
for &(pkg, _, _) in jobs.iter() {
while self.queue.len() > 0 {
loop {
match self.queue.dequeue() {
- Some((id, Fresh, (pkg, _))) => {
+ Some((id, Fresh, (pkg, (_, fresh)))) => {
assert!(self.active.insert(id, 1u));
try!(self.config.shell().status("Fresh", pkg));
self.tx.send((id.clone(), Fresh, Ok(Vec::new())));
+ try!(fresh.run());
}
- Some((id, Dirty, (pkg, job))) => {
+ Some((id, Dirty, (pkg, (dirty, _)))) => {
assert!(self.active.insert(id, 1));
try!(self.config.shell().status("Compiling", pkg));
let my_tx = self.tx.clone();
let id = id.clone();
- self.pool.execute(proc() my_tx.send((id, Dirty, job.run())));
+ self.pool.execute(proc() my_tx.send((id, Dirty, dirty.run())));
}
None => break,
}
--- /dev/null
+//! Management of the directory layout of a build
+//!
+//! The directory layout is a little tricky at times, hence a separate file to
+//! house this logic. The current layout looks like this:
+//!
+//! # This is the root directory for all output, the top-level package
+//! # places all of its output here.
+//! target/
+//!
+//! # This is the root directory for all output of *dependencies*
+//! deps/
+//!
+//! # This is a temporary directory as part of the build process. When a
+//! # build starts, it initially moves the old `deps` directory to this
+//! # location. This is done to ensure that there are no stale artifacts
+//! # lying around in the build directory which may cause a build to
+//! # succeed where it would fail elsewhere.
+//! #
+//! # If a package is determined to be fresh, its files are moved out of
+//! # this directory and back into `deps`.
+//! old-deps/
+//!
+//! # Similar to old-deps, this is where all of the output under
+//! # `target/` is moved at the start of a build.
+//! old-root/
+
+use std::io;
+use std::io::{fs, IoResult};
+
+/// The filesystem layout of one build output directory.
+///
+/// `root` and `deps` are the live output locations; `old_root` and
+/// `old_deps` are staging directories holding the previous build's
+/// artifacts while a build runs (see the module docs above).
+pub struct Layout {
+ // Top-level output directory (primary package's artifacts).
+ root: Path,
+ // `root/deps` — artifacts of dependencies.
+ deps: Path,
+
+ // Previous build's `deps`, moved aside by `prepare()`.
+ old_deps: Path,
+ // Previous build's top-level files, moved aside by `prepare()`.
+ old_root: Path,
+}
+
+/// A view into a `Layout` which selects between the primary output
+/// directory and the dependency directory depending on whether the
+/// primary package is currently being built.
+pub struct LayoutProxy<'a> {
+ root: &'a Layout,
+ // true when the top-level package (not a dependency) is being built
+ primary: bool,
+}
+
+impl Layout {
+ /// Creates a layout rooted at `root`. No filesystem work happens here;
+ /// directories are created lazily by `prepare()`.
+ pub fn new(root: Path) -> Layout {
+ Layout {
+ deps: root.join("deps"),
+ old_deps: root.join("old-deps"),
+ old_root: root.join("old-root"),
+ root: root,
+ }
+ }
+
+ /// Prepares the directories for a fresh build: the previous build's
+ /// `deps` directory and top-level files are stashed into the `old-*`
+ /// staging directories (after removing any stale staging dirs from an
+ /// interrupted build), and empty live directories take their place.
+ pub fn prepare(&mut self) -> IoResult<()> {
+ if !self.root.exists() {
+ try!(fs::mkdir_recursive(&self.root, io::UserRWX));
+ }
+
+ // Clear out staging dirs left over from a previous (possibly
+ // interrupted) build.
+ if self.old_deps.exists() {
+ try!(fs::rmdir_recursive(&self.old_deps));
+ }
+ if self.old_root.exists() {
+ try!(fs::rmdir_recursive(&self.old_root));
+ }
+ // Stash the previous build's dependency artifacts wholesale.
+ if self.deps.exists() {
+ try!(fs::rename(&self.deps, &self.old_deps));
+ }
+
+ try!(fs::mkdir(&self.deps, io::UserRWX));
+ try!(fs::mkdir(&self.old_root, io::UserRWX));
+
+ // Stash the previous build's top-level files; directories (such as
+ // the freshly created `deps`/`old-root`) are skipped.
+ for file in try!(fs::readdir(&self.root)).iter() {
+ if !file.is_file() { continue }
+
+ try!(fs::rename(file, &self.old_root.join(file.filename().unwrap())));
+ }
+
+ Ok(())
+ }
+
+ // Accessors for the live and staging directory paths.
+ pub fn dest<'a>(&'a self) -> &'a Path { &self.root }
+ pub fn deps<'a>(&'a self) -> &'a Path { &self.deps }
+ pub fn old_dest<'a>(&'a self) -> &'a Path { &self.old_root }
+ pub fn old_deps<'a>(&'a self) -> &'a Path { &self.old_deps }
+}
+
+// Once the layout is dropped the build has finished, so the staging
+// directories are no longer needed. Removal is best-effort; errors are
+// deliberately ignored since there is nothing useful to do with them.
+impl Drop for Layout {
+ fn drop(&mut self) {
+ let _ = fs::rmdir_recursive(&self.old_deps);
+ let _ = fs::rmdir_recursive(&self.old_root);
+ }
+}
+
+impl<'a> LayoutProxy<'a> {
+ pub fn new(root: &'a Layout, primary: bool) -> LayoutProxy<'a> {
+ LayoutProxy {
+ root: root,
+ primary: primary,
+ }
+ }
+
+ /// The live output directory: the layout root for the primary package,
+ /// the `deps` directory for everything else.
+ pub fn root(&self) -> &'a Path {
+ if self.primary {self.root.dest()} else {self.root.deps()}
+ }
+ /// The live dependency directory, regardless of primacy.
+ pub fn deps(&self) -> &'a Path { self.root.deps() }
+
+ /// The staging directory corresponding to `root()`, where the previous
+ /// build's artifacts were moved by `Layout::prepare`.
+ pub fn old_root(&self) -> &'a Path {
+ if self.primary {self.root.old_dest()} else {self.root.old_deps()}
+ }
+}
mod fingerprint;
mod job;
mod job_queue;
+mod layout;
type Args = Vec<String>;
debug!("compile_targets; targets={}; pkg={}; deps={}", targets, pkg, deps);
- let host_dir = pkg.get_absolute_target_dir()
- .join(uniq_target_dest(targets).unwrap_or(""));
- let host_deps_dir = host_dir.join("deps");
-
- let target_dir = pkg.get_absolute_target_dir()
- .join(config.target().unwrap_or(""))
- .join(uniq_target_dest(targets).unwrap_or(""));
- let deps_target_dir = target_dir.join("deps");
+ let root = pkg.get_absolute_target_dir();
+ let dest = uniq_target_dest(targets).unwrap_or("");
+ let host_layout = layout::Layout::new(root.join(dest));
+ let target_layout = config.target().map(|target| {
+ layout::Layout::new(root.join(target).join(dest))
+ });
- let mut cx = try!(Context::new(resolve, deps, config,
- target_dir, deps_target_dir,
- host_dir, host_deps_dir));
+ let mut cx = try!(Context::new(env, resolve, deps, config,
+ host_layout, target_layout));
// First ensure that the destination directory exists
try!(cx.prepare(pkg));
// Only compile lib targets for dependencies
let targets = dep.get_targets().iter().filter(|target| {
- target.is_lib() && match env {
- "test" => target.get_profile().is_compile(),
- _ => target.get_profile().get_env() == env,
- }
+ cx.is_relevant_target(*target)
}).collect::<Vec<&Target>>();
try!(compile(targets.as_slice(), dep, &mut cx, &mut jobs));
fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package,
cx: &mut Context<'a, 'b>,
- jobs: &mut Vec<(&'a Package, Freshness, Job)>)
+ jobs: &mut Vec<(&'a Package, Freshness, (Job, Job))>)
-> CargoResult<()> {
debug!("compile_pkg; pkg={}; targets={}", pkg, targets);
// TODO: Can a fingerprint be per-target instead of per-package? Doing so
// would likely involve altering the granularity of key for the
// dependency queue that is later used to run jobs.
- let (freshness, write_fingerprint) =
+ let (freshness, write_fingerprint, copy_old) =
try!(fingerprint::prepare(cx, pkg, targets));
// Note that we build the job backwards because each job will produce more
let build_libs = Job::all(libs, bins);
let job = Job::all(build_cmds, vec![build_libs]);
- jobs.push((pkg, freshness, job));
+ jobs.push((pkg, freshness, (job, copy_old)));
Ok(())
}
let mut cmd = cmd.split(' ');
// TODO: this shouldn't explicitly pass `false` for dest/deps_dir, we may
// be building a C lib for a plugin
+ let layout = cx.layout(false);
let mut p = util::process(cmd.next().unwrap())
.cwd(pkg.get_root())
- .env("OUT_DIR", Some(cx.dest(false).as_str()
- .expect("non-UTF8 dest path")))
- .env("DEPS_DIR", Some(cx.deps_dir(false).as_str()
- .expect("non-UTF8 deps path")))
+ .env("OUT_DIR", Some(layout.root().as_str()
+ .expect("non-UTF8 dest path")))
+ .env("DEPS_DIR", Some(layout.deps().as_str()
+ .expect("non-UTF8 deps path")))
.env("TARGET", cx.config.target());
for arg in cmd {
p = p.arg(arg);
let crate_types = target.rustc_crate_types();
let root = package.get_root();
- log!(5, "root={}; target={}; crate_types={}; dest={}; deps={}; \
- verbose={}; req={}",
- root.display(), target, crate_types, cx.dest(false).display(),
- cx.deps_dir(false).display(), cx.primary, req);
+ log!(5, "root={}; target={}; crate_types={}; verbose={}; req={}",
+ root.display(), target, crate_types, cx.primary, req);
let primary = cx.primary;
let rustcs = prepare_rustc(package, target, crate_types, cx, req);
}
into.push("--out-dir".to_string());
- into.push(cx.dest(plugin).display().to_string());
+ into.push(cx.layout(plugin).root().display().to_string());
if !plugin {
fn opt(into: &mut Vec<String>, key: &str, prefix: &str,
fn build_deps_args(dst: &mut Args, package: &Package, cx: &Context,
plugin: bool) {
+ let layout = cx.layout(plugin);
dst.push("-L".to_string());
- dst.push(cx.dest(plugin).display().to_string());
+ dst.push(layout.root().display().to_string());
dst.push("-L".to_string());
- dst.push(cx.deps_dir(plugin).display().to_string());
+ dst.push(layout.deps().display().to_string());
for &(_, target) in cx.dep_targets(package).iter() {
+ let layout = cx.layout(target.get_profile().is_plugin());
for filename in cx.target_filenames(target).iter() {
dst.push("--extern".to_string());
dst.push(format!("{}={}/{}",
target.get_name(),
- cx.deps_dir(target.get_profile().is_plugin()).display(),
+ layout.deps().display(),
filename));
}
}
assert_that(p.cargo_process("cargo-build"), execs().with_status(0));
assert_that(process(p.bin("foo")), execs().with_status(0));
})
+
+// Removing a dependency from the manifest must also remove its stale
+// artifacts from the output directory: after `bar` is dropped from
+// Cargo.toml, the leftover `extern crate bar` in main.rs should fail to
+// resolve (exit status 101) instead of silently linking against the
+// previous build's artifact.
+test!(deletion_causes_failure {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#)
+ .file("src/main.rs", r#"
+ extern crate bar;
+ fn main() {}
+ "#)
+ .file("bar/Cargo.toml", r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ "#)
+ .file("bar/src/lib.rs", "");
+
+ assert_that(p.cargo_process("cargo-build"), execs().with_status(0));
+ let p = p.file("Cargo.toml", r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#);
+ assert_that(p.cargo_process("cargo-build"), execs().with_status(101));
+})
COMPILING, baz.display(),
COMPILING, bar.display(),
COMPILING, p.root().display())));
+ assert_that(&p.bin("foo"), existing_file());
assert_that(p.process(cargo_dir().join("cargo-build")),
execs().with_stdout(format!("{} baz v0.5.0 (file:{})\n\
{} bar v0.5.0 (file:{})\n\
FRESH, baz.display(),
FRESH, bar.display(),
FRESH, p.root().display())));
+ assert_that(&p.bin("foo"), existing_file());
})
test!(nested_deps_recompile {